import numpy as np
import pandas as pd
from sklearn.preprocessing import StandardScaler
import matplotlib.pyplot as plt
import seaborn as sns
%matplotlib inline
import warnings
warnings.filterwarnings('ignore')
import tensorflow as tf
from tensorflow import keras
import umap.umap_ as umap
%config InlineBackend.figure_format = 'svg'
np.random.seed(42)
pd.set_option('display.max_columns', None)
pd.set_option('display.max_rows', 100)
data=pd.read_csv('Preprocessed_DM_xx.csv')
np.random.seed(42)
data=data.sample(frac=1) #Shuffle the data set
np.random.seed(42)
#Flag hypertension (HTN): on BP-lowering medication, or first systolic >= 140, or first diastolic >= 90.
#A boolean mask keeps the flag aligned with the shuffled index (positional assignment via index labels would mislabel rows after the shuffle above).
HTN_mask=((data['Currently.taking.a.prescribed.medicine.to.lower.BP'] != 0) | (data['First.SYSTOLIC.reading'] >= 140) | (data['First.DIASTOLIC.reading'] >= 90))
data['HTN']=HTN_mask.astype(float)
data=data.drop(["First.SYSTOLIC.reading","First.DIASTOLIC.reading","Currently.taking.a.prescribed.medicine.to.lower.BP"], axis=1)
data=data.reset_index(drop=True)
data.columns
data=data.drop(["Hb_adjust_alt_smok","Second.SYSTOLIC.reading","Second.DIASTOLIC.reading","Third.SYSTOLIC.reading","Third.DIASTOLIC.reading","Hb_status","Glucose.level",'SBP_status'], axis=1)
data=data.loc[data['BMI'] != 99.99]
data=data.loc[data['Hemoglobin.level..g.dl...1.decimal.'] != 99.99]
data=data.loc[data['Currently.has.asthma'] != .5]
data=data.loc[data['Currently.has.thyroid.disorder'] != .5]
data=data.loc[data['Currently.has.heart.disease'] != .5]
data=data.loc[data['Currently.has.cancer'] != .5]
data=data.loc[data['DM_history'] == 1]
data=data.loc[data['Type.of.caste.or.tribe.of.the.household.head'] != 0]
data=data.loc[data['Time.to.get.to.water.source..minutes.'] != -1]
data=data.drop(["Unnamed: 0","DM_status","DM_history"], axis=1)
np.random.seed(42)
data.reset_index(drop=True, inplace=True) # Reset the index (10125 rows remain after the filters above)
# Creating 2 new dataframes: "data_disease" with disease-related features and "data_others" with the remaining features
data_disease= data[['Currently.has.asthma',
'Currently.has.thyroid.disorder', 'Currently.has.heart.disease',
'Currently.has.cancer', 'Suffers.from.TB','HTN']]
data_others= data[['Drinks.alcohol', 'Smoking_stat','Has.refrigerator',
'Has.bicycle', 'Has.motorcycle.scooter', 'Has.car.truck', 'Owns.livestock..herds.or.farm.animals','Frequency.takes.milk.or.curd',
'Frequency.eats.pulses.or.beans',
'Frequency.eats.dark.green.leafy.vegetable', 'Frequency.eats.fruits',
'Frequency.eats.eggs', 'Frequency.eats.fish',
'Frequency.eats.chicken.or.meat', 'Frequency.eats.fried.food',
'Frequency.takes.aerated.drinks','Frequency.household.members.smoke.inside.the.house','Wealth.index',
'Highest.educational.level', 'Current.age','BMI','Hemoglobin.level..g.dl...1.decimal.','Time.to.get.to.water.source..minutes.', 'Household.head.s.religion', 'Sex', 'Type.of.place.of.residence', 'Household.structure',
'Type.of.caste.or.tribe.of.the.household.head','Type.of.cooking.fuel','Source.of.drinking.water']]
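# Sanity check (a sketch): the disease view and the non-disease view share no columns.
assert set(data_disease.columns).isdisjoint(data_others.columns)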
def feature_clustering(UMAP_neb, min_dist_UMAP, metric, data, visual):
    import umap.umap_ as umap
    np.random.seed(42)
    # Reduce the input features to 2 dimensions with UMAP
    data_embedded = umap.UMAP(n_neighbors=UMAP_neb, min_dist=min_dist_UMAP, n_components=2, metric=metric, random_state=42).fit_transform(data)
    # Standardize each UMAP axis to zero mean and unit variance
    data_embedded[:,0]=(data_embedded[:,0]- np.mean(data_embedded[:,0]))/np.std(data_embedded[:,0])
    data_embedded[:,1]=(data_embedded[:,1]- np.mean(data_embedded[:,1]))/np.std(data_embedded[:,1])
    result = pd.DataFrame(data = data_embedded ,
                          columns = ['UMAP_0', 'UMAP_1'])
    if visual==1:
        sns.lmplot( x="UMAP_0", y="UMAP_1",data=result,fit_reg=False,legend=False,scatter_kws={"s": 3}) # specify the point size
        #plt.savefig('clusters_umap_all.png', dpi=700, bbox_inches='tight')
        plt.show()
    return result
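# Example usage (a sketch on hypothetical toy data, not part of the pipeline):
toy=pd.DataFrame(np.random.rand(200,4), columns=['f1','f2','f3','f4'])
toy_emb=feature_clustering(15,0.1,'euclidean',toy,0)
print(toy_emb.shape) # (200, 2): z-scored UMAP coordinates UMAP_0 and UMAP_1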
ord_list=['Drinks.alcohol', 'Smoking_stat','Has.refrigerator',
'Has.bicycle', 'Has.motorcycle.scooter', 'Has.car.truck', 'Owns.livestock..herds.or.farm.animals','Frequency.takes.milk.or.curd',
'Frequency.eats.pulses.or.beans',
'Frequency.eats.dark.green.leafy.vegetable', 'Frequency.eats.fruits',
'Frequency.eats.eggs', 'Frequency.eats.fish',
'Frequency.eats.chicken.or.meat', 'Frequency.eats.fried.food',
'Frequency.takes.aerated.drinks','Frequency.household.members.smoke.inside.the.house','Wealth.index',
'Highest.educational.level' ]
cont_list=['Current.age','BMI','Hemoglobin.level..g.dl...1.decimal.','Time.to.get.to.water.source..minutes.']
nom_list=['Household.head.s.religion', 'Sex', 'Type.of.place.of.residence', 'Household.structure',
'Type.of.caste.or.tribe.of.the.household.head','Type.of.cooking.fuel','Source.of.drinking.water']
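# Sanity check (a sketch): the three type lists together should partition the
# columns of data_others, with no overlap and nothing left out.
assert set(ord_list) | set(cont_list) | set(nom_list) == set(data_others.columns)
assert len(ord_list) + len(cont_list) + len(nom_list) == data_others.shape[1]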
def FDC(data, cont_list, nom_list, ord_list, cont_metric, ord_metric, nom_metric, drop_nominal, visual):
    np.random.seed(42)
    colors_set1 = ["lightcoral", "lightseagreen", "mediumorchid", "orange", "burlywood", "cornflowerblue", "plum", "yellowgreen"]
    customPalette_set1 = sns.set_palette(sns.color_palette(colors_set1))
    cont_df=data[cont_list]
    nom_df=data[nom_list]
    ord_df=data[ord_list]
    cont_emb=feature_clustering(30,0.1, cont_metric, cont_df, 0) #Reducing continuous features to 2 dimensions
    ord_emb=feature_clustering(30,0.1, ord_metric, ord_df, 0) #Reducing ordinal features to 2 dimensions
    nom_emb=feature_clustering(30,0.1, nom_metric, nom_df, 0) #Reducing nominal features to 2 dimensions
    if drop_nominal==1:
        #Concatenating all reduced dimensions into a 5D embedding (only 1D kept from nominal)
        result_concat=pd.concat([ord_emb, cont_emb, nom_emb.drop(['UMAP_1'],axis=1)],axis=1)
    else:
        result_concat=pd.concat([ord_emb, cont_emb, nom_emb],axis=1)
    #Reducing the 5D embedding to 2D using UMAP
    data_embedded = umap.UMAP(n_neighbors=30, min_dist=0.001, n_components=2, metric='euclidean', random_state=42).fit_transform(result_concat)
    result_reduced = pd.DataFrame(data = data_embedded ,
                                  columns = ['UMAP_0', 'UMAP_1'])
    if visual==1:
        sns.lmplot( x="UMAP_0", y="UMAP_1",data=result_reduced,fit_reg=False,legend=False,scatter_kws={"s": 3},palette=customPalette_set1) # specify the point size
        plt.show()
        #plt.savefig('clusters_umap_all.png', dpi=700, bbox_inches='tight')
    return result_concat, result_reduced #returns both the 5D and the 2D embedding
# Applying Feature Distributed Clustering (FDC) to all 10125 records, using all features except the disease features
entire_data_FDC_emb_five,entire_data_FDC_emb_two=FDC(data_others,cont_list,nom_list,ord_list,'euclidean','canberra','hamming',1,1)
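# Shapes for reference: entire_data_FDC_emb_five is (n, 5), the concatenated group-wise
# embedding (2 ordinal + 2 continuous + 1 nominal dimension, since drop_nominal=1),
# and entire_data_FDC_emb_two is (n, 2), used for visualization.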
def Kmeans(no_of_clusters, thirty_d_embedding, two_d_embedding, visual, pal):
    np.random.seed(42)
    from sklearn.cluster import KMeans
    kmeans = KMeans(n_clusters=no_of_clusters)
    clusters=kmeans.fit_predict(thirty_d_embedding)
    (values,counts) = np.unique(clusters,return_counts=True)
    two_d_embedding['Cluster'] = clusters
    if visual==1:
        sns.lmplot( x="UMAP_0", y="UMAP_1",
                    data=two_d_embedding,
                    fit_reg=False,
                    legend=True,
                    hue='Cluster', # color by cluster
                    scatter_kws={"s": 3},palette=pal) # specify the point size
        plt.savefig('k-means_ref_30dim.png', dpi=700, bbox_inches='tight')
        plt.show()
    return two_d_embedding.Cluster.to_list(),counts
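# Note: scikit-learn's KMeans labels every point with a cluster id in 0..k-1 and never
# returns -1, so the noise-removal step below keeps all rows; it only takes effect if a
# density-based clusterer that marks noise as -1 (e.g. HDBSCAN) is swapped in here.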
#setting color palette for visualization of clusters
colors_set1 = ['lightcoral','cornflowerblue','orange','mediumorchid', 'lightseagreen','olive', 'chocolate','steelblue']
customPalette_set1 = sns.set_palette(sns.color_palette(colors_set1))
#Applying clustering algorithm on FDC embeddings from entire data
entire_data_cluster_list,entire_data_cluster_counts=Kmeans(4,data_others,entire_data_FDC_emb_two,1,customPalette_set1)
#Getting indices of non-noise points (with k-means this keeps every row; see the note above)
non_noise_indices= np.where(np.array(entire_data_cluster_list)!=-1)
#Removing noise/outliers from the FDC embeddings and from the entire data
entire_data_FDC_emb_five= entire_data_FDC_emb_five.iloc[non_noise_indices]
entire_data_FDC_emb_two= entire_data_FDC_emb_two.iloc[non_noise_indices]
entire_data_cluster_list= np.array(entire_data_cluster_list)[non_noise_indices]
data_others= data_others.iloc[non_noise_indices]
#Creating a new column for storing cluster labels
data_others['cluster_labels']= entire_data_cluster_list
#getting binary representation for cluster labels
data_others= pd.get_dummies(data=data_others, columns=['cluster_labels'])
#Getting column names of encoded cluster labels
cluster_column_names=data_others.columns[-len(np.unique(entire_data_cluster_list)):].to_list()
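# A sketch of the encoding: with the 4 clusters produced above, a row assigned to
# cluster 2 carries [0, 0, 1, 0] across the one-hot columns, so cluster_column_names
# is expected to be ['cluster_labels_0', ..., 'cluster_labels_3'].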
#75% of entire data for training
np.random.seed(42)
data=data_others.sample(frac=0.75) # Training data
#Another 25% of entire data for validation
np.random.seed(42)
data_val=data_others.drop(data.index) # Validation data
#Dividing training data into three folds
np.random.seed(42)
df_1=data.sample(frac=0.33) #fold 1
df=data.drop(df_1.index)
df_2=df.sample(frac=0.51) #fold 2
df_3=df.drop(df_2.index) #fold 3
np.random.seed(42)
#Possible combinations: concatenate 2 folds for training and use the remaining fold for testing
training_folds=[pd.concat([df_1,df_2],axis=0), pd.concat([df_2,df_3],axis=0), pd.concat([df_3,df_1],axis=0)]
testing_folds=[df_3,df_1,df_2]
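# Sanity check (a sketch): the three folds are pairwise disjoint and together cover the training data.
assert set(df_1.index).isdisjoint(df_2.index) and set(df_1.index).isdisjoint(df_3.index) and set(df_2.index).isdisjoint(df_3.index)
assert len(df_1)+len(df_2)+len(df_3)==len(data)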
def neural_network(n_features, hidden_dim1, hidden_dim2, out_emb_size, act1, act2, loss):
    np.random.seed(42)
    tf.random.set_seed(42)
    model=keras.Sequential([
        keras.layers.Dense(hidden_dim1,input_dim=n_features,activation=act1),
        keras.layers.Dense(hidden_dim2,activation=act2),
        keras.layers.Dense(out_emb_size)])
    model.compile(optimizer="adam" ,
                  loss=loss,
                  metrics=['mse'])
    return model
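# Example instantiation (a sketch; demo_model is a throwaway name, with the sizes used
# below: 30 input features, hidden widths int(0.6*30)=18 and int(0.36*30)=10, 5 outputs):
demo_model=neural_network(30,18,10,5,"relu","sigmoid","mse")
demo_model.summary() # 30 -> 18 -> 10 -> 5 dense stack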
def cluster_incidence_matrix_mod(cluster_list_new):
    np.random.seed(42)
    # matrix[i, j] = 1 when points i and j share a cluster label
    matrix=np.zeros((len(cluster_list_new),len(cluster_list_new)))
    for i in range(len(cluster_list_new)):
        for j in range(len(cluster_list_new)):
            if cluster_list_new[i]==cluster_list_new[j]:
                matrix[i,j]=1
    return matrix
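# An equivalent vectorized form (a sketch; cluster_incidence_matrix_fast is a hypothetical
# helper, not used by the pipeline): broadcasting the label vector against itself yields
# the same 0/1 incidence matrix without Python loops.
def cluster_incidence_matrix_fast(cluster_list_new):
    arr=np.asarray(cluster_list_new)
    return (arr[:,None]==arr[None,:]).astype(float)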
#Function for decoding the encoded (one-hot) cluster labels
def label_decoder(label_dataframe):
    label_array=np.array(label_dataframe)
    decoded_labels=[]
    for i in label_array:
        decoded_labels.append(np.argmax(i))
    return decoded_labels
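# Equivalent one-liner (a sketch): np.argmax(np.asarray(label_dataframe), axis=1).tolist()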
colnames=[]
for i in range(len(entire_data_FDC_emb_five.columns)):
    colnames.append('c'+str(i+1))
np.random.seed(42)
count=0
fold_readings=[]
while count<3:
    FDC_emb_five_train=entire_data_FDC_emb_five.loc[list(training_folds[count].index)] #5D FDC embedding of the training folds
    FDC_emb_two_train=entire_data_FDC_emb_two.loc[list(training_folds[count].index)] #2D embedding of the training folds
    FDC_emb_five_train.columns=colnames
    #Thirty-dimensional data of the training fold as features_matrix (X_train)
    features_matrix=np.array(training_folds[count].drop(cluster_column_names, axis=1,inplace=False)) #X_train
    #Five-dimensional FDC embedding of the training fold as target_matrix (y_train)
    target_matrix=np.array(FDC_emb_five_train) #y_train
    #Train a neural network to predict the five-dimensional embedding
    model_1=neural_network(len(features_matrix[0]),int(0.6*len(features_matrix[0])),int(0.36*len(features_matrix[0])),len(target_matrix[0]),"relu","sigmoid","mse")
    history=model_1.fit(features_matrix,target_matrix,epochs=30,batch_size=8)
    print('\n')
    print('Training history across epochs for fold ',count+1)
    plt.plot(history.history['mse'],'r')
    plt.ylabel('mse')
    plt.xlabel('epoch')
    plt.show()
    #Using the same thirty-dimensional features_matrix (X_train) with the encoded cluster labels of the training fold as target_labels_matrix (y_train)
    target_labels_matrix=np.array(training_folds[count].loc[:,cluster_column_names]) #y
    #Train a neural network to predict the encoded cluster labels
    model_2=neural_network(len(features_matrix[0]),int(0.6*len(features_matrix[0])),int(0.36*len(features_matrix[0])),len(target_labels_matrix[0]),"relu","softmax","mse")
    history=model_2.fit(features_matrix,target_labels_matrix,epochs=30,batch_size=8)
    print('\n')
    print('Training history across epochs for fold ',count+1)
    plt.plot(history.history['mse'],'r')
    plt.ylabel('mse')
    plt.xlabel('epoch')
    plt.show()
    #Decoding the cluster labels of the training fold
    decoded_target_labels_matrix=label_decoder(target_labels_matrix)
    #Actual encoded cluster labels of the testing fold for metric calculation
    ref_clusters=testing_folds[count].loc[:,cluster_column_names]
    #Decoding the encoded cluster labels of the testing fold
    decoded_ref_clusters=label_decoder(ref_clusters)
    #Predicting the five-dimensional embedding of the testing fold using the trained model_1
    testing_data=testing_folds[count].drop(cluster_column_names, axis=1,inplace=False)
    predicted_5dim=pd.DataFrame(model_1.predict(testing_data), columns=colnames)
    #UMAP on the predicted 5D embedding
    predicted_2dim=feature_clustering(30,0.01, "euclidean", predicted_5dim, 0)
    #Predicting the encoded cluster labels of the testing fold using the trained model_2
    predicted_clusters=pd.DataFrame(model_2.predict(testing_data))
    #Decoding the predicted encoded cluster labels
    decoded_predicted_clusters=label_decoder(predicted_clusters)
    #Concatenating the training and predicted 5D embeddings
    concatenated_5dim=pd.concat([FDC_emb_five_train,predicted_5dim])
    #UMAP on the concatenated embedding
    two_dim_viz=feature_clustering(30, 0.01, 'euclidean', concatenated_5dim, 0)
    #Concatenating decoded cluster labels of the training fold and the predicted testing fold (testing labels offset so they map to the light hues)
    concatenated_cluster_labels=np.concatenate([np.array(decoded_target_labels_matrix),np.array(decoded_predicted_clusters)+len(np.unique(decoded_target_labels_matrix))])
    two_dim_viz['Cluster']= concatenated_cluster_labels
    #Setting dark colors for the training folds
    darkerhues=['lightcoral','cornflowerblue','orange','mediumorchid', 'lightseagreen','olive', 'chocolate','steelblue']
    colors_set2=darkerhues[:len(np.unique(decoded_target_labels_matrix))]
    #Appending corresponding light colors for the testing folds
    colors_set2=colors_set2+["lightpink", 'skyblue', 'wheat', "plum","paleturquoise", "lightgreen", 'burlywood','lightsteelblue']
    print('Visualization of FDC for training fold '+str(count+1)+' (shown in dark hues) and predicted clusters from the neural network on testing fold '+str(count+1)+' (shown in corresponding light hues)')
    #Visualizing the clusters of both the training and testing folds
    sns.lmplot( x="UMAP_0", y="UMAP_1", data=two_dim_viz, fit_reg=False, legend=False, hue='Cluster', scatter_kws={"s": 3},palette=sns.set_palette(sns.color_palette(colors_set2)))
    plt.show()
    #Metric calculation
    CIM_predicted=cluster_incidence_matrix_mod(np.array(decoded_predicted_clusters)) #Cluster incidence matrix for the predicted clusters
    CIM_reference=cluster_incidence_matrix_mod(np.array(decoded_ref_clusters)) #Cluster incidence matrix for the reference clusters
    #Diagonal of the product counts, for each patient, how many patients share both its predicted and its reference cluster
    Product=np.dot(CIM_predicted,CIM_reference)
    cluster_incidences_in_data=np.sum(CIM_reference,axis=1) #size of each patient's reference cluster
    mean_points_in_same_clusters=np.mean(np.diagonal(Product)/cluster_incidences_in_data)
    fold_readings.append(mean_points_in_same_clusters*100)
    print("Average percentage of patients belonging to the same cluster: {}%".format(mean_points_in_same_clusters*100))
    print('\n')
    count+=1
print('\n')
print('\n')
[Output abridged: Keras per-epoch logs omitted. Both networks trained for 30 epochs per fold; final training mse per fold: fold 1: 0.2090 (embedding model) and 0.0154 (cluster-label model); fold 2: 0.1867 and 0.0153; fold 3: 0.1820 and 0.0154. Training-history plots and cluster visualizations shown for each fold.]
Average percentage of patients belonging to the same cluster: fold 1: 90.28024691862613%, fold 2: 91.74585314304338%, fold 3: 92.62026454017513%
print('Average percentage of patients belonging to the same cluster over all three folds:', np.mean(np.array(fold_readings)))
Average percentage of patients belonging to the same cluster over all three folds: 91.54878820061488
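# Check: (90.2802 + 91.7459 + 92.6203) / 3 = 91.5488 (4 dp), matching the printed mean of fold_readings.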
np.random.seed(42)
FDC_emb_five_data=entire_data_FDC_emb_five.loc[list(data.index)] #5D FDC embedding of the training data
FDC_emb_two_data=entire_data_FDC_emb_two.loc[list(data.index)] #2D embedding of the training data
FDC_emb_five_data.columns=colnames
#Thirty-dimensional training data as features_matrix (X_train)
features_matrix=np.array(data.drop(cluster_column_names, axis=1,inplace=False)) #X_train
#Five-dimensional FDC embedding of the training data as target_matrix (y_train)
target_matrix=np.array(FDC_emb_five_data) #y_train
#Train a neural network to predict the five-dimensional embedding
model_1=neural_network(len(features_matrix[0]),int(0.6*len(features_matrix[0])),int(0.36*len(features_matrix[0])),len(target_matrix[0]),"relu","sigmoid","mse")
history=model_1.fit(features_matrix,target_matrix,epochs=30,batch_size=8)
print('\n')
print('Training history across epochs for training data ')
plt.plot(history.history['mse'],'r')
plt.ylabel('mse')
plt.xlabel('epoch')
plt.show()
#Using the same thirty-dimensional features_matrix (X_train) with the encoded cluster labels of the training data as target_labels_matrix (y_train)
target_labels_matrix=np.array(data.loc[:,cluster_column_names]) #y
#Train a neural network to predict the encoded cluster labels
model_2=neural_network(len(features_matrix[0]),int(0.6*len(features_matrix[0])),int(0.36*len(features_matrix[0])),len(target_labels_matrix[0]),"relu","softmax","mse")
history=model_2.fit(features_matrix,target_labels_matrix,epochs=30,batch_size=8)
print('\n')
print('Training history across epochs for training data ')
plt.plot(history.history['mse'],'r')
plt.ylabel('mse')
plt.xlabel('epoch')
plt.show()
#Decoding the cluster labels of the training data
decoded_target_labels_matrix=label_decoder(target_labels_matrix)
#Actual encoded cluster labels of validation data for metric calculation
ref_clusters=data_val.loc[:,cluster_column_names]
#Decoding encoded cluster labels of validation data
decoded_ref_clusters=label_decoder(ref_clusters)
#Predicting the five-dimensional embedding of the validation data using the trained model_1
validation_data=data_val.drop(cluster_column_names, axis=1,inplace=False)
predicted_5dim=pd.DataFrame(model_1.predict(validation_data), columns=colnames)
#UMAP on predicted 5D embedding
predicted_2dim=feature_clustering(30,0.01, "euclidean", predicted_5dim, 0)
#Predicting the encoded cluster labels of the validation data using the trained model_2
predicted_clusters=pd.DataFrame(model_2.predict(validation_data))
#Decoding predicted encoded cluster labels
decoded_predicted_clusters=label_decoder(predicted_clusters)
#Concatenating the training and predicted 5D embeddings
concatenated_5dim=pd.concat([FDC_emb_five_data,predicted_5dim])
#UMAP on the concatenated embedding
two_dim_viz=feature_clustering(30, 0.01, 'euclidean', concatenated_5dim, 0)
#Concatenating decoded cluster labels of the training data and the predicted validation data (validation labels offset so they map to the light hues)
concatenated_cluster_labels=np.concatenate([np.array(decoded_target_labels_matrix),np.array(decoded_predicted_clusters)+len(np.unique(decoded_target_labels_matrix))])
two_dim_viz['Cluster']= concatenated_cluster_labels
#Setting dark colors for the training data
darkerhues=['lightcoral','cornflowerblue','orange','mediumorchid', 'lightseagreen','olive', 'chocolate','steelblue']
colors_set2=darkerhues[:len(np.unique(decoded_target_labels_matrix))]
#Appending corresponding light colors for the validation data
colors_set2=colors_set2+["lightpink", 'skyblue', 'wheat', "plum","paleturquoise", "lightgreen", 'burlywood','lightsteelblue']
print('Visualization of FDC for the training data (shown in dark hues) and predicted clusters from the neural network on the validation data (shown in corresponding light hues)')
#visualizing the clusters of both training and validation data
sns.lmplot( x="UMAP_0", y="UMAP_1", data=two_dim_viz, fit_reg=False, legend=False, hue='Cluster', scatter_kws={"s": 3},palette=sns.set_palette(sns.color_palette(colors_set2)))
plt.show()
#Metric calculation
CIM_predicted=cluster_incidence_matrix_mod(np.array(decoded_predicted_clusters)) #Cluster incidence matrix for the predicted clusters
CIM_reference=cluster_incidence_matrix_mod(np.array(decoded_ref_clusters)) #Cluster incidence matrix for the reference clusters
#Diagonal of the product counts, for each patient, how many patients share both its predicted and its reference cluster
Product=np.dot(CIM_predicted,CIM_reference)
cluster_incidences_in_data=np.sum(CIM_reference,axis=1) #size of each patient's reference cluster
mean_points_in_same_clusters=np.mean(np.diagonal(Product)/cluster_incidences_in_data)
fold_readings.append(mean_points_in_same_clusters*100)
print("Average percentage of patients belonging to the same cluster: {}%".format(mean_points_in_same_clusters*100))
print('\n')
print('\n')
print('\n')
[Output abridged: Keras per-epoch logs omitted. Both networks trained for 30 epochs on the full training data; final training mse: 0.1795 (embedding model) and 0.0144 (cluster-label model). Training-history plots and the cluster visualization shown.]
Average percentage of patients belonging to the same cluster (validation data): 91.52248986882553%